EchoSpike Predictive Plasticity¶
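
This notebook analyzes a pretrained EchoSpike model on the Spiking Heidelberg Digits (SHD) dataset: it inspects the training loss history, the adaptive plasticity thresholds on example inputs, the learned weights, and finally trains linear readouts on top of the frozen network.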

In [ ]:
import os
import pickle

import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
import torch
from scipy.signal import savgol_filter
from tqdm.notebook import trange

from data import load_SHD, augment_shd
from main import Args
from model import EchoSpike, simple_out
from utils import get_accuracy, get_samples, train_out_proj_fast, train_out_proj_closed_form

plt.rcParams['figure.dpi'] = 600
plt.rcParams.update({'font.size': 18})
torch.manual_seed(0)
color_list = sns.color_palette('muted')
device = 'cpu'
batch_size = 128
folder = 'models/'
model_name = 'shd_1layer_large.pt'
with open(folder + model_name[:-3] + '_args.pkl', 'rb') as f:
    args = pickle.load(f)
# args = Args()
online = args.online
print(vars(args))
{'model_name': 'shd_1layer_large', 'dataset': 'shd', 'online': True, 'device': 'cuda', 'recurrency_type': 'none', 'lr': 0.0001, 'epochs': 1000, 'augment': True, 'batch_size': 128, 'n_hidden': [1332], 'inp_thr': 0.05, 'c_y': [1.5, -1.5], 'n_inputs': 700, 'n_outputs': 20, 'n_time_bins': 100, 'beta': 0.95}
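The loaded checkpoint is a single hidden layer of 1332 spiking neurons (membrane decay beta = 0.95, no recurrency) trained online for 1000 epochs on augmented SHD with learning rate 1e-4; the factors c_y = [1.5, -1.5] (for predictive and contrastive samples, respectively) and the input-activity threshold inp_thr = 0.05 reappear in the threshold analysis below.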

Dataset¶

Spiking Heidelberg Digits
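SHD consists of spoken digits (English and German) encoded as spike trains over 700 input channels; here each sample is binned into 100 time steps and mapped to one of 20 classes (cf. n_inputs, n_time_bins and n_outputs in the arguments above).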

In [ ]:
n_time_bins = 100
# alternative loaders: load_PMNIST(n_time_bins, scale=0.9, patches=True) or load_NMNIST(n_time_bins, batch_size=batch_size)
train_loader, test_loader = load_SHD(batch_size=batch_size)
# Plot Example(s)
for i in range(1):
    frames, target = train_loader.next_item(-1, contrastive=True)
    plt.figure(figsize=(10, 10))
    plt.axis('off')
    plt.imshow(frames.squeeze(1).T)
    # plt.colorbar()
    print(frames.shape, target)
plt.axis('on')
/home/lars/ownCloud/ETH/Master/Project_2/SNN_CLAPP/data.py:28: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).
  self.y = torch.tensor(y)
torch.Size([100, 1, 700]) tensor([4.])
Out[ ]:
(-0.5, 99.5, 699.5, -0.5)

Load pretrained model¶

In [ ]:
SNN = EchoSpike(args.n_inputs, args.n_hidden, beta=args.beta, c_y=args.c_y, device=device, recurrency_type=args.recurrency_type, online=args.online).to(device)
SNN.load_state_dict(torch.load(folder+model_name, map_location=device))
# train(SNN, train_loader, args.epochs, device, args.model_name,
#       batch_size=args.batch_size, online=args.online, lr=1e-8, augment=args.augment)
from_epoch = 0
echo_train_loss = torch.load(folder+model_name[:-3]+'_loss_hist.pt', map_location='cpu')[int(from_epoch*len(train_loader)/args.batch_size):]
print(echo_train_loss.shape)
for i in range(echo_train_loss.shape[-1]):
    plt.plot(from_epoch+(args.batch_size*np.arange(echo_train_loss.shape[0])/len(train_loader)), savgol_filter(echo_train_loss[:,i], 99, 1), color=color_list[i])
plt.legend([f'layer {i+1}' for i in range(len(SNN.layers))])
# no y ticks, because it's not really meaningful
plt.yticks([])
plt.xlabel('Epoch')
plt.ylabel('EchoSpike Loss')
torch.Size([63719, 1])
Out[ ]:
Text(0, 0.5, 'EchoSpike Loss')
In [ ]:
# plotting adaptive threshold and update rate for an example
while True:
    init_echo, label_0 = train_loader.next_item(-1, contrastive=True)
    sample_1, label_1 = train_loader.next_item(-1, contrastive=True)
    sample_2, label_2 = train_loader.next_item(label_1, contrastive=False)
    if label_0 == 9 and label_1 == 7:
        break
print(label_0, label_1, label_2)
SNN.eval()
with torch.no_grad():
    # feed first sample to get initial activity
    for t in range(100):
        inp_activity = init_echo[t].mean(axis=-1)
        SNN(init_echo[t], torch.tensor(-1, device=device), inp_activity=inp_activity)
    SNN.reset(-1)
    # feed second sample to get the update rates and thresholds for contrastive case
    contrastive_thresholds = torch.zeros(100)
    contrastive_temp_sim = torch.zeros((len(SNN.layers), 100))
    for t in range(100):
        inp_activity = sample_1[t].mean(axis=-1)
        out_spk, mems, losses = SNN(sample_1[t], torch.tensor(-1, device=device), inp_activity=inp_activity)
        contrastive_thresholds[t] = inp_activity * args.c_y[1]
        contrastive_temp_sim[:, t] = losses
    SNN.reset(-1)
    # feed third sample to get the update rates and thresholds for predictive case
    predictive_thresholds = torch.zeros(100)
    predictive_temp_sim = torch.zeros((len(SNN.layers), 100))
    for t in range(100):
        inp_activity = sample_2[t].mean(axis=-1)
        out_spk, mems, losses = SNN(sample_2[t], torch.tensor(1, device=device), inp_activity=inp_activity)
        predictive_thresholds[t] = inp_activity * args.c_y[0]
        predictive_temp_sim[:, t] = -losses
    SNN.reset(1)
    # plot thresholds, with sample as background
    layer = len(SNN.layers) - 1  # index of the layer to visualize (this checkpoint has a single hidden layer)
    fig, ax = plt.subplots(figsize=(10, 5))
    ax2  = ax.twinx()
    # imshow in background
    ax.imshow(sample_1.squeeze(1).T, aspect='auto', cmap='Reds')
    ax2.plot(-contrastive_temp_sim[layer], color='r', label='Negative Similarity Score')
    ax2.plot(contrastive_thresholds, color='r', linestyle='--', label='Adaptive Threshold')
    ax2.hlines(args.inp_thr*args.c_y[1], 0, 100, color='r', linestyle=':', label='Input Threshold (times c(-1))')
    # highlight regions where the thresholds are crossed
    argwhere = np.argwhere(np.logical_and((-contrastive_temp_sim[layer] < contrastive_thresholds).numpy(), contrastive_thresholds.numpy() < args.inp_thr*args.c_y[1]))
    for i in range(argwhere.shape[0]):
        ax2.axvspan(argwhere[i], argwhere[i]+1, color='r', alpha=0.2, lw=0)

    ax.yaxis.set_visible(False)
    ax2.spines['right'].set_visible(False)
    ax2.yaxis.tick_left()
    ax2.yaxis.set_label_position('left')
    ax2.set_xlim(ax.get_xlim())
    # get rid of right margin
    ax2.margins(0)
    ax.set_xlabel('Timesteps')
    plt.ylabel('Thresholds & Similarity Score')
    plt.xlim(0, 100)
    plt.legend()
    # same for predictive
    fig, ax = plt.subplots(figsize=(10, 5))
    ax2  = ax.twinx()
    # imshow in background
    ax.imshow(sample_2.squeeze(1).T, aspect='auto', cmap='Blues')
    ax2.plot(predictive_temp_sim[layer], color='b', label='Similarity Score')
    ax2.plot(predictive_thresholds, color='b', linestyle='--', label='Adaptive Threshold')
    ax2.hlines(args.inp_thr*args.c_y[0], 0, 100, color='b', linestyle=':', label='Input Threshold (times c(1))')
    # highlight regions where the thresholds are crossed
    argwhere = np.argwhere(np.logical_and((predictive_temp_sim[layer] < predictive_thresholds).numpy(), predictive_thresholds.numpy() > args.inp_thr*args.c_y[0]))
    for i in range(argwhere.shape[0]):
        ax2.axvspan(argwhere[i], argwhere[i]+1, color='b', alpha=0.1, lw=0)
    ax.yaxis.set_visible(False)
    ax2.spines['right'].set_visible(False)
    ax2.yaxis.tick_left()
    ax2.yaxis.set_label_position('left')
    ax2.set_xlim(ax.get_xlim())
    # get rid of right margin
    #ax2.margins(0)
    ax.set_xlabel('Timesteps')
    plt.ylabel('Thresholds & Similarity Score')
    plt.xlim(0, 100)
    plt.legend()
    plt.show()
tensor([9.]) tensor([7.]) tensor([7.])
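
The cell above illustrates how plasticity is gated in EchoSpike: at every time step the adaptive threshold is the mean input activity scaled by the factor c(y), i.e. threshold_t = c(y) * mean(input_t), with c(1) = 1.5 for predictive and c(-1) = -1.5 for contrastive samples in this checkpoint. The shaded time steps are those where the layer's similarity score falls below this adaptive threshold while the mean input activity also exceeds inp_thr = 0.05, i.e. the steps at which the EchoSpike loss (and thus the weight update) is active.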

Analyze Weights Directly¶

In [ ]:
layers = [SNN.layers[0].fc.weight[:,:args.n_inputs]]
for i in range(1, len(SNN.layers)):
    layers.append(SNN.layers[i].fc.weight[:,:args.n_hidden[i-1]] @ layers[-1])

for i in range(len(SNN.layers)):
    plt.figure()
    plt.imshow(SNN.layers[i].fc.weight.detach(), cmap='viridis')
    plt.colorbar()
    # plt.figure()
    # plt.imshow(SNN.layers[i].pred.weight.detach(), vmax=0.5, vmin=-0.5)
    # plt.colorbar()
for lay in layers:
    plt.figure()
    plt.imshow(lay.detach())
    plt.colorbar()
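
The matrices above show the effective input weights of every neuron at once. As a minimal sketch (not part of the original analysis), the effective weights of a few individual neurons can also be plotted per input channel, assuming the `layers` list computed in the previous cell is still in scope:

In [ ]:
# Sketch: effective input weights ("receptive fields") of a few example neurons.
# `layers` holds, for each layer, the chained weight product mapping the 700
# input channels to that layer's neurons (computed in the cell above).
neuron_ids = [0, 1, 2]  # arbitrary example neurons
for lay_idx, lay in enumerate(layers):
    w = lay.detach().cpu().numpy()
    plt.figure(figsize=(8, 3))
    for n in neuron_ids:
        plt.plot(w[n], label=f'neuron {n}')
    plt.xlabel('Input channel')
    plt.ylabel(f'Effective weight (layer {lay_idx + 1})')
    plt.legend()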

Train output Projection¶
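
The EchoSpike network is kept frozen here; readout layers (`simple_out`) are trained with Adam on top of the raw input spikes and on each layer's spikes (concatenated with the input when `cat=True`), with the cross-entropy loss evaluated at the last time bin of every sample.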

In [ ]:
from tqdm.notebook import tqdm
def train_out_proj(epochs, batch, cat, out_projs=None):
    # train output projections from all layers (and no layer)
    losses_out = []
    beta = 1.0
    lr = 1e-4
    augment = True
    optimizers = []
    print_interval = 10*batch
    if out_projs is None:
        out_projs = []
        out_proj_0 = simple_out(700, 20, beta=beta)
    else:
        for out_p in out_projs:
            out_p.train()
            out_p.reset()
        out_proj_0 = out_projs[0]
        out_projs = out_projs[1:]
    optim_0 = torch.optim.Adam(out_proj_0.parameters(), lr=lr)
    for lay in range(len(SNN.layers)):
        if len(out_projs) <= lay:
            if cat:
                out_projs.append(simple_out(sum(args.n_hidden[:lay+1])+700, 20, beta=beta))
            else:
                out_projs.append(simple_out(args.n_hidden[lay], 20, beta=beta))
        optimizers.append(torch.optim.Adam(out_projs[lay].parameters(), lr=lr))
        optimizers[-1].zero_grad()
    SNN.eval()
    acc = []
    target = batch_size*[-1]
    correct = (len(SNN.layers) + 1)*[0]
    with torch.no_grad():
        pbar = tqdm(total=len(train_loader)*epochs)
        while len(losses_out)*batch < len(train_loader)*epochs:
            data, target = train_loader.next_item(target, contrastive=True)
            SNN.reset(0)
            logit_lists = [[] for _ in range(len(SNN.layers)+1)]
            data = data.squeeze()
            if augment:
                data = augment_shd(data)
            for step in range(data.shape[0]):
                data_step = data[step].float().to(device)
                target = target.to(device)
                logits, _, _ = SNN(data_step, 0)
                if step == args.n_time_bins-1:
                    _, logts = out_proj_0(data_step, target)
                    logit_lists[0] = logts
                    for lay in range(len(SNN.layers)):
                        if cat:
                            data_step = torch.cat([data_step, logits[lay]], dim=-1)
                        else:
                            data_step = logits[lay]
                        _, logts = out_projs[lay](data_step, target)
                        logit_lists[lay+1] = logts
                else:
                    out_proj_0(data_step, None)
                    for lay in range(len(SNN.layers)):
                        if cat:
                            data_step = torch.cat([data_step, logits[lay]], dim=-1)
                        else:
                            data_step = logits[lay]
                        out_projs[lay](data_step, None)
            
            preds = [logit_lists[lay].argmax(axis=-1) for lay in range(len(SNN.layers)+1)]
            correct = [correct[lay] + (preds[lay] == target).sum() for lay in range(len(SNN.layers)+1)]
            out_proj_0.reset()
            for i, out_proj in enumerate(out_projs):
                out_proj.reset()

            losses_out.append(torch.tensor([torch.nn.functional.cross_entropy(logit_lists[lay], target.squeeze().long()) for lay in range(len(SNN.layers)+1)], requires_grad=False))

            optim_0.step()
            optim_0.zero_grad()
            for opt in optimizers:
                opt.step()
                opt.zero_grad()
            
            if len(losses_out)*batch % print_interval == 0:
                pbar.write(f'Cross Entropy Loss: {(torch.stack(losses_out)[-print_interval//batch:].sum(dim=0)/(print_interval//batch)).numpy()}\n' +
                           f'Correct: {100*np.array(correct)/print_interval}%')
                acc.append(np.array(correct)/print_interval)
                correct = (len(SNN.layers) + 1)*[0]
            pbar.update(batch)
    return [out_proj_0, *out_projs], np.asarray(acc), torch.stack(losses_out)

with torch.no_grad():
    if args.augment:
        n_epochs = 100
        cat = True
        # if already trained, load the output projections
        if os.path.exists(folder+model_name[:-3]+'_out_projs.pt') and False:
            out_projs = torch.load(folder+model_name[:-3]+'_out_projs.pt', map_location=device)
        else:
            out_projs, acc, losses_out = train_out_proj(n_epochs, batch_size, cat)
            test_acc = get_accuracy(SNN, out_projs, test_loader, device, cat, )
            train_acc = get_accuracy(SNN, out_projs, train_loader, device, cat)
            torch.save(out_projs, folder+model_name[:-3]+'_out_projs.pt')
  0%|          | 0/815600 [00:00<?, ?it/s]
Cross Entropy Loss: [14.313995  9.099342]
Correct: [5.625    5.859375]%
Cross Entropy Loss: [8.187934  6.3089476]
Correct: [4.921875 6.484375]%
[... training log truncated: loss and accuracy (first entry: readout from the raw inputs, second entry: readout from layer 1) are printed every 10 batches over the 100 epochs ...]
Cross Entropy Loss: [2.0272176  0.90049267]
Correct: [34.921875 69.296875]%
Cross Entropy Loss: [1.8368189 0.7983962]
Correct: [42.1875  71.71875]%
  0%|          | 0/36 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 46.95%
From layer 1:
Accuracy: 68.86%
  0%|          | 0/128 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 45.13%
From layer 1:
Accuracy: 78.47%
In [ ]:
accs = {'train': train_acc, 'test': test_acc}
with open('final_results/' + model_name[:-3] + '_accuracies.pkl', 'wb') as f:
    pickle.dump(accs, f)
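The pickled accuracies can be read back in later to compare models. A minimal sketch, assuming the file written above exists under the same final_results/ path:

In [ ]:
import pickle
import torch

# reload the accuracy dictionary saved above (same path assumed)
with open('final_results/' + model_name[:-3] + '_accuracies.pkl', 'rb') as f:
    saved_accs = pickle.load(f)
for split, acc in saved_accs.items():
    acc = torch.as_tensor(acc, dtype=torch.float)
    # entries hold per-layer accuracies, possibly stacked over repetitions; average over repetitions
    print(f'{split}: {100 * acc.reshape(-1, acc.shape[-1]).mean(dim=0)} %')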
In [ ]:
if not args.augment or True:  # note: 'or True' overrides the check, so this cell always runs
    # if snn_samples already exist, don't recompute
    accs = {}
    try:
        snn_samples
    except NameError:
        snn_samples, targets = get_samples(SNN, train_loader, args.n_hidden, device)
    for cat in [False, True]:
        test_accs = []
        train_accs = []
        for i in range(10):
            with torch.no_grad():
                out_projs, acc, losses_out = train_out_proj_fast(SNN, args, 30, 60, snn_samples, train_loader.y, cat=cat, lr=3e-4, weight_decay=0)
            print('Mean abs weights', out_projs[-1].out_proj.weight.abs().mean())
            test_accs.append(get_accuracy(SNN, out_projs, test_loader, device, cat=cat)[0])
            train_accs.append(get_accuracy(SNN, out_projs, train_loader, device, cat=cat)[0])
        test_accs = torch.stack([torch.tensor(ta) for ta in test_accs])
        train_accs = torch.stack([torch.tensor(ta) for ta in train_accs])
        print(f'Fast Classifier Mean Test Accuracy: {100*torch.mean(test_accs, dim=0)}, Std: {100*torch.std(test_accs, dim=0)}')
        print(f'Fast Classifier Mean Train Accuracy: {100*torch.mean(train_accs, dim=0)}, Std: {100*torch.std(train_accs, dim=0)}')
        if cat:
            accs['train_cat'] = train_accs
            accs['test_cat'] = test_accs
        else:
            accs['train'] = train_accs
            accs['test'] = test_accs
    with open('final_results/' + model_name[:-3] + '_accuracies.pkl', 'wb') as f:
        pickle.dump(accs, f)
  0%|          | 0/127 [00:00<?, ?it/s]
  0%|          | 0/30 [00:00<?, ?it/s]
Cross Entropy Loss: [4.242321 3.597268]
Correct: [16.01275135 25.64982835]%
Cross Entropy Loss: [2.6065571 1.7033745]
Correct: [28.28592447 47.49877391]%
Cross Entropy Loss: [2.308533  1.3958906]
Correct: [33.14124571 56.33889161]%
Cross Entropy Loss: [2.035802  1.1075165]
Correct: [38.46248161 63.2540461 ]%
Cross Entropy Loss: [1.8709322 0.9424524]
Correct: [41.67484061 68.23197646]%
Cross Entropy Loss: [1.740124  0.8482229]
Correct: [44.62972045 71.97155468]%
Cross Entropy Loss: [1.6877215  0.76048875]
Correct: [45.81902894 74.86512997]%
Cross Entropy Loss: [1.591468  0.6946195]
Correct: [48.62677783 76.8023541 ]%
Cross Entropy Loss: [1.5229774 0.6221868]
Correct: [50.51495831 79.07062285]%
Cross Entropy Loss: [1.458724  0.5892946]
Correct: [52.0107896 80.8239333]%
Cross Entropy Loss: [1.3771989  0.49151567]
Correct: [54.04610103 83.96272683]%
Cross Entropy Loss: [1.3287163  0.45680276]
Correct: [55.37027955 85.26238352]%
Cross Entropy Loss: [1.2989422  0.48010737]
Correct: [56.3266307  84.47768514]%
Cross Entropy Loss: [1.2240931 0.4037957]
Correct: [58.3374203  87.13830309]%
Cross Entropy Loss: [1.1788702 0.3904544]
Correct: [59.96812163 87.3467386 ]%
Cross Entropy Loss: [1.1600385  0.41012552]
Correct: [60.07846984 86.01029917]%
Cross Entropy Loss: [1.1524144 0.3574405]
Correct: [60.92447278 87.98430603]%
Cross Entropy Loss: [1.0817355 0.3115347]
Correct: [62.39578225 90.15448749]%
Cross Entropy Loss: [1.0559404  0.31925252]
Correct: [63.36439431 89.97057381]%
Cross Entropy Loss: [1.0346327  0.31040132]
Correct: [63.73222168 89.81118195]%
Cross Entropy Loss: [1.0102172  0.27367902]
Correct: [64.92153016 91.29475233]%
Cross Entropy Loss: [0.9743725 0.2954081]
Correct: [66.35605689 90.8656204 ]%
Cross Entropy Loss: [0.98448575 0.29440653]
Correct: [66.20892594 90.52231486]%
Cross Entropy Loss: [0.94053906 0.25010514]
Correct: [67.10397254 92.31240804]%
Cross Entropy Loss: [0.94230896 0.23350498]
Correct: [67.54536538 92.93771457]%
Cross Entropy Loss: [0.92381734 0.22419865]
Correct: [68.41589014 93.35458558]%
Cross Entropy Loss: [0.87555194 0.22176147]
Correct: [69.75232957 93.21971555]%
Cross Entropy Loss: [0.85328805 0.19395205]
Correct: [70.32859245 94.21284944]%
Cross Entropy Loss: [0.85153246 0.18605126]
Correct: [70.97842079 94.33545856]%
Cross Entropy Loss: [0.84349555 0.17225222]
Correct: [71.00294262 95.07111329]%
Mean abs weights tensor(0.0282, grad_fn=<MeanBackward0>)
  0%|          | 0/36 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 42.49%
From layer 1:
Accuracy: 59.28%
  0%|          | 0/128 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 68.37%
From layer 1:
Accuracy: 94.26%
  0%|          | 0/30 [00:00<?, ?it/s]
Cross Entropy Loss: [4.073899 3.084285]
Correct: [17.04266797 27.88131437]%
Cross Entropy Loss: [2.4571614 1.6696565]
Correct: [29.83079941 48.1976459 ]%
Cross Entropy Loss: [2.158677  1.3225952]
Correct: [35.96125552 57.30750368]%
Cross Entropy Loss: [1.9980195 1.1241448]
Correct: [40.17900932 63.14369789]%
Cross Entropy Loss: [1.8545433 0.9479055]
Correct: [43.0725846  68.86954389]%
Cross Entropy Loss: [1.7089391 0.810201 ]
Correct: [45.61059343 72.63364394]%
Cross Entropy Loss: [1.6610197  0.74785984]
Correct: [47.51103482 74.77930358]%
Cross Entropy Loss: [1.5396955 0.6747855]
Correct: [50.20843551 76.71652771]%
Cross Entropy Loss: [1.4511102  0.57624143]
Correct: [52.55026974 80.48062776]%
Cross Entropy Loss: [1.4498479  0.54617566]
Correct: [52.57479156 82.17263364]%
Cross Entropy Loss: [1.3869054 0.5143402]
Correct: [54.3158411 83.6316822]%
Cross Entropy Loss: [1.3152362 0.4659862]
Correct: [56.71897989 84.22020598]%
Cross Entropy Loss: [1.2791835 0.4460537]
Correct: [56.66993624 84.93133889]%
Cross Entropy Loss: [1.188095  0.4030684]
Correct: [59.35507602 86.7459539 ]%
Cross Entropy Loss: [1.210667  0.3967458]
Correct: [59.15890142 87.07699853]%
Cross Entropy Loss: [1.1536107  0.39789623]
Correct: [60.7405591  86.96665032]%
Cross Entropy Loss: [1.1533293  0.33190638]
Correct: [60.63021089 89.60274644]%
Cross Entropy Loss: [1.0722674  0.33376732]
Correct: [63.67091712 89.36978911]%
Cross Entropy Loss: [1.0457838 0.3104883]
Correct: [63.92839627 90.20353114]%
Cross Entropy Loss: [1.044988  0.2806283]
Correct: [65.10544385 91.07405591]%
Cross Entropy Loss: [0.99554724 0.24690443]
Correct: [66.061795   92.42275625]%
Cross Entropy Loss: [0.97746533 0.26394287]
Correct: [66.52770966 91.49092692]%
Cross Entropy Loss: [0.95890874 0.24020644]
Correct: [67.25110348 92.6679745 ]%
Cross Entropy Loss: [0.95711505 0.25717342]
Correct: [67.12849436 91.85875429]%
Cross Entropy Loss: [0.9108462 0.2540686]
Correct: [68.90632663 91.95684159]%
Cross Entropy Loss: [0.9289546  0.21630014]
Correct: [67.82736636 93.29328102]%
Cross Entropy Loss: [0.90583116 0.20905387]
Correct: [68.74693477 93.41589014]%
Cross Entropy Loss: [0.85201806 0.17822443]
Correct: [70.36537518 94.85041687]%
Cross Entropy Loss: [0.8225846  0.18970598]
Correct: [71.83668465 94.32319765]%
Cross Entropy Loss: [0.8006406  0.18231758]
Correct: [71.92251103 94.37224129]%
Mean abs weights tensor(0.0284, grad_fn=<MeanBackward0>)
  0%|          | 0/36 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 45.58%
From layer 1:
Accuracy: 60.20%
  0%|          | 0/128 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 72.36%
From layer 1:
Accuracy: 94.04%
  0%|          | 0/30 [00:00<?, ?it/s]
Cross Entropy Loss: [4.817585  3.1494026]
Correct: [16.33153507 26.90044139]%
Cross Entropy Loss: [2.5496001 1.720337 ]
Correct: [29.10740559 47.78077489]%
Cross Entropy Loss: [2.1929011 1.2976797]
Correct: [35.18881805 58.5335949 ]%
Cross Entropy Loss: [1.9794525 1.1509584]
Correct: [39.68857283 63.37665522]%
Cross Entropy Loss: [1.8100059 0.9004014]
Correct: [42.91319274 70.14467876]%
Cross Entropy Loss: [1.7021078 0.8466746]
Correct: [45.29180971 71.66503188]%
Cross Entropy Loss: [1.5714142 0.7334806]
Correct: [49.25208436 75.82148112]%
Cross Entropy Loss: [1.5303166  0.65792185]
Correct: [50.         77.74644434]%
Cross Entropy Loss: [1.4232478 0.6005354]
Correct: [52.48896518 80.08827857]%
Cross Entropy Loss: [1.4100878 0.557266 ]
Correct: [52.74644434 81.17949975]%
Cross Entropy Loss: [1.3389598  0.52500254]
Correct: [55.37027955 82.49141736]%
Cross Entropy Loss: [1.2852685 0.4657681]
Correct: [56.65767533 85.0416871 ]%
Cross Entropy Loss: [1.2536222  0.43154344]
Correct: [57.55272192 85.36047082]%
Cross Entropy Loss: [1.1850439  0.39446685]
Correct: [60.07846984 87.10152035]%
Cross Entropy Loss: [1.1486057  0.39163616]
Correct: [61.36586562 87.27317312]%
Cross Entropy Loss: [1.138888   0.33588448]
Correct: [61.78273664 89.1613536 ]%
Cross Entropy Loss: [1.1087989  0.33060727]
Correct: [61.96665032 89.22265817]%
Cross Entropy Loss: [1.0505157  0.34494877]
Correct: [63.91613536 88.60961256]%
Cross Entropy Loss: [1.0593307 0.2977021]
Correct: [63.87935262 90.52231486]%
Cross Entropy Loss: [1.0332059  0.29888126]
Correct: [64.67631192 90.15448749]%
Cross Entropy Loss: [0.9646623  0.26796493]
Correct: [66.50318784 91.25796959]%
Cross Entropy Loss: [1.000825   0.27082965]
Correct: [65.92692496 91.71162334]%
Cross Entropy Loss: [0.9209945  0.22360133]
Correct: [69.00441393 93.33006376]%
Cross Entropy Loss: [0.9411144  0.22391175]
Correct: [67.4717999  93.08484551]%
Cross Entropy Loss: [0.8951251 0.2265266]
Correct: [69.42128494 93.28102011]%
Cross Entropy Loss: [0.8751516  0.18989967]
Correct: [69.44580677 94.42128494]%
Cross Entropy Loss: [0.86485827 0.20826614]
Correct: [69.75232957 93.58754291]%
Cross Entropy Loss: [0.8685733  0.20807831]
Correct: [70.26728789 93.73467386]%
Cross Entropy Loss: [0.82083774 0.17242521]
Correct: [71.65277097 95.07111329]%
Cross Entropy Loss: [0.79769623 0.24366604]
Correct: [72.42520844 92.6679745 ]%
Mean abs weights tensor(0.0282, grad_fn=<MeanBackward0>)
  0%|          | 0/36 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 45.19%
From layer 1:
Accuracy: 62.46%
  0%|          | 0/128 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 73.31%
From layer 1:
Accuracy: 94.62%
  0%|          | 0/30 [00:00<?, ?it/s]
Cross Entropy Loss: [4.327544 3.121484]
Correct: [16.18440412 27.15792055]%
Cross Entropy Loss: [2.4879038 1.7302457]
Correct: [29.95340853 47.17999019]%
Cross Entropy Loss: [2.1704276 1.323708 ]
Correct: [35.82638548 57.14811182]%
Cross Entropy Loss: [1.9856046 1.0945578]
Correct: [39.72535557 63.89161354]%
Cross Entropy Loss: [1.8662959 0.9745579]
Correct: [43.39136832 68.13388916]%
Cross Entropy Loss: [1.7445215  0.87123567]
Correct: [45.23050515 71.72633644]%
Cross Entropy Loss: [1.6118568 0.696673 ]
Correct: [48.13634134 76.37322217]%
Cross Entropy Loss: [1.5618907 0.695606 ]
Correct: [49.74252084 76.55713585]%
Cross Entropy Loss: [1.4951458  0.58966815]
Correct: [52.12113781 80.89749877]%
Cross Entropy Loss: [1.4569771 0.5676034]
Correct: [52.63609613 80.95880333]%
Cross Entropy Loss: [1.3763602  0.51568174]
Correct: [55.1250613  82.99411476]%
Cross Entropy Loss: [1.3067043  0.47274423]
Correct: [56.60863168 84.67385974]%
Cross Entropy Loss: [1.2475032  0.43454126]
Correct: [58.32515939 85.69151545]%
Cross Entropy Loss: [1.2692385  0.40325662]
Correct: [58.54585581 86.66012751]%
Cross Entropy Loss: [1.1839606  0.40141073]
Correct: [60.2378617  86.50073565]%
Cross Entropy Loss: [1.1717148  0.38092324]
Correct: [61.19421285 87.53065228]%
Cross Entropy Loss: [1.1394297  0.33831626]
Correct: [61.47621383 89.36978911]%
Cross Entropy Loss: [1.1169313 0.3335592]
Correct: [61.88082393 89.08778813]%
Cross Entropy Loss: [1.0985036  0.35411304]
Correct: [62.849436   88.48700343]%
Cross Entropy Loss: [1.000474   0.27461743]
Correct: [65.62040216 91.29475233]%
Cross Entropy Loss: [1.0095401 0.2693873]
Correct: [65.63266307 91.3683178 ]%
Cross Entropy Loss: [0.97859585 0.2549964 ]
Correct: [66.41736145 92.1652771 ]%
Cross Entropy Loss: [0.9864843  0.26967803]
Correct: [66.51544875 91.29475233]%
Cross Entropy Loss: [0.9471579  0.22369798]
Correct: [67.32466896 93.14615007]%
Cross Entropy Loss: [0.929046   0.21375218]
Correct: [67.87641    93.61206474]%
Cross Entropy Loss: [0.9104445  0.20008993]
Correct: [68.79597842 94.28641491]%
Cross Entropy Loss: [0.8795324  0.19315547]
Correct: [70.05885238 94.54389407]%
Cross Entropy Loss: [0.8492627  0.19857624]
Correct: [70.69641981 94.04119667]%
Cross Entropy Loss: [0.8293602 0.1812293]
Correct: [71.56694458 94.69102501]%
Cross Entropy Loss: [0.8178792  0.17953587]
Correct: [71.787641   94.72780775]%
Mean abs weights tensor(0.0281, grad_fn=<MeanBackward0>)
  0%|          | 0/36 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 45.32%
From layer 1:
Accuracy: 63.91%
  0%|          | 0/128 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 72.17%
From layer 1:
Accuracy: 97.52%
  0%|          | 0/30 [00:00<?, ?it/s]
Cross Entropy Loss: [4.070601  3.1317592]
Correct: [16.44188328 24.79156449]%
Cross Entropy Loss: [2.4527628 1.6880032]
Correct: [30.23540951 47.81755763]%
Cross Entropy Loss: [2.1775837 1.317041 ]
Correct: [35.77734183 56.62089259]%
Cross Entropy Loss: [1.9400244 1.1106669]
Correct: [39.94605199 63.53604708]%
Cross Entropy Loss: [1.8046176  0.93704015]
Correct: [43.30554193 68.89406572]%
Cross Entropy Loss: [1.6927974  0.83194435]
Correct: [45.91711623 72.20451202]%
Cross Entropy Loss: [1.606057  0.7346853]
Correct: [49.05590976 74.97547818]%
Cross Entropy Loss: [1.5419457  0.63257295]
Correct: [50.80922021 78.75183914]%
Cross Entropy Loss: [1.4753547 0.6120853]
Correct: [52.62383521 79.80627759]%
Cross Entropy Loss: [1.4268469 0.5294239]
Correct: [53.20009809 82.12359   ]%
Cross Entropy Loss: [1.3741575 0.5099843]
Correct: [55.52967141 82.82246199]%
Cross Entropy Loss: [1.3249209  0.49259162]
Correct: [56.58410986 83.69298676]%
Cross Entropy Loss: [1.2742426 0.4784017]
Correct: [57.79794017 83.85237862]%
Cross Entropy Loss: [1.2393807  0.42719865]
Correct: [59.31829328 85.7405591 ]%
Cross Entropy Loss: [1.1515131  0.35231334]
Correct: [61.25551741 88.57282982]%
Cross Entropy Loss: [1.1796793  0.34744847]
Correct: [60.6424718  88.94065718]%
Cross Entropy Loss: [1.0937381 0.3324708]
Correct: [63.43795978 89.28396273]%
Cross Entropy Loss: [1.0860159  0.33217537]
Correct: [63.54830799 88.78126533]%
Cross Entropy Loss: [1.0553398  0.30146846]
Correct: [64.43109367 90.35066209]%
Cross Entropy Loss: [1.0420817 0.2994347]
Correct: [64.73761648 90.31387935]%
Cross Entropy Loss: [1.0087656  0.27375016]
Correct: [66.35605689 91.76066699]%
Cross Entropy Loss: [0.98227817 0.24844377]
Correct: [66.82197155 92.21432075]%
Cross Entropy Loss: [0.95235205 0.23944178]
Correct: [67.61893085 92.6679745 ]%
Cross Entropy Loss: [0.91979766 0.26677158]
Correct: [68.20745463 91.6625797 ]%
Cross Entropy Loss: [0.88821006 0.22378337]
Correct: [69.1760667  93.02354095]%
Cross Entropy Loss: [0.8804046 0.2257684]
Correct: [70.15693968 92.83962727]%
Cross Entropy Loss: [0.8652985  0.18099944]
Correct: [70.90485532 94.96076508]%
Cross Entropy Loss: [0.8585656  0.17009576]
Correct: [70.26728789 95.35311427]%
Cross Entropy Loss: [0.8705536  0.20302601]
Correct: [70.03433055 93.83276116]%
Cross Entropy Loss: [0.82587284 0.18966936]
Correct: [71.49337911 94.4825895 ]%
Mean abs weights tensor(0.0281, grad_fn=<MeanBackward0>)
  0%|          | 0/36 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 46.38%
From layer 1:
Accuracy: 58.92%
  0%|          | 0/128 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 73.25%
From layer 1:
Accuracy: 94.23%
  0%|          | 0/30 [00:00<?, ?it/s]
Cross Entropy Loss: [4.4483128 3.1161757]
Correct: [15.64492398 26.79009318]%
Cross Entropy Loss: [2.5584948 1.7132568]
Correct: [28.65375184 47.57233938]%
Cross Entropy Loss: [2.1659534 1.3345348]
Correct: [34.84551251 57.41785189]%
Cross Entropy Loss: [2.015003  1.0901335]
Correct: [38.79352624 63.86709171]%
Cross Entropy Loss: [1.8691787 1.0044407]
Correct: [42.36145169 66.8710152 ]%
Cross Entropy Loss: [1.7254089 0.8297699]
Correct: [45.95389897 72.25355566]%
Cross Entropy Loss: [1.6569808  0.74367225]
Correct: [47.6949485  74.97547818]%
Cross Entropy Loss: [1.5617207  0.71452576]
Correct: [49.87739088 76.66748406]%
Cross Entropy Loss: [1.5058035  0.60052973]
Correct: [51.75331045 79.84306032]%
Cross Entropy Loss: [1.4604566  0.54324216]
Correct: [52.28052967 81.93967631]%
Cross Entropy Loss: [1.3802576 0.5138194]
Correct: [54.64688573 82.95733203]%
Cross Entropy Loss: [1.3599819  0.47766754]
Correct: [55.16184404 83.85237862]%
Cross Entropy Loss: [1.2893304 0.4804259]
Correct: [57.29524277 84.26924963]%
Cross Entropy Loss: [1.2434955  0.42147464]
Correct: [58.68072585 86.23099559]%
Cross Entropy Loss: [1.230147   0.40258113]
Correct: [59.77194703 86.70917116]%
Cross Entropy Loss: [1.1664425 0.3494373]
Correct: [60.34820991 88.92839627]%
Cross Entropy Loss: [1.1241125 0.3357098]
Correct: [61.39038744 89.02648357]%
Cross Entropy Loss: [1.1038005 0.3036447]
Correct: [62.22412948 90.52231486]%
Cross Entropy Loss: [1.0950699 0.3065696]
Correct: [62.99656694 90.47327121]%
Cross Entropy Loss: [1.0616846  0.28493968]
Correct: [63.80578715 90.9637077 ]%
Cross Entropy Loss: [1.0119269  0.27027607]
Correct: [65.06866111 91.77292791]%
Cross Entropy Loss: [0.9968296  0.27108988]
Correct: [65.91466405 91.8710152 ]%
Cross Entropy Loss: [0.9275687  0.22652677]
Correct: [67.69249632 92.91319274]%
Cross Entropy Loss: [0.9371393  0.25098255]
Correct: [67.52084355 91.98136341]%
Cross Entropy Loss: [0.9379846  0.21601027]
Correct: [68.25649828 93.41589014]%
Cross Entropy Loss: [0.9290978  0.20331538]
Correct: [68.0725846  94.00441393]%
Cross Entropy Loss: [0.9017016  0.19907373]
Correct: [68.79597842 94.20058852]%
Cross Entropy Loss: [0.8746368 0.1970738]
Correct: [69.56841589 93.99215302]%
Cross Entropy Loss: [0.87055296 0.20123456]
Correct: [69.83815596 93.80823933]%
Cross Entropy Loss: [0.820621   0.16774909]
Correct: [71.87346739 95.32859245]%
Mean abs weights tensor(0.0282, grad_fn=<MeanBackward0>)
  0%|          | 0/36 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 45.32%
From layer 1:
Accuracy: 62.72%
  0%|          | 0/128 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 72.84%
From layer 1:
Accuracy: 96.26%
  0%|          | 0/30 [00:00<?, ?it/s]
Cross Entropy Loss: [4.0607557 3.0031996]
Correct: [15.62040216 27.83227072]%
Cross Entropy Loss: [2.5603333 1.6726999]
Correct: [28.911231   48.33251594]%
Cross Entropy Loss: [2.2113397 1.3044305]
Correct: [35.50760177 58.1289848 ]%
Cross Entropy Loss: [1.9778465 1.1184703]
Correct: [39.78666013 63.90387445]%
Cross Entropy Loss: [1.8704928 0.9091276]
Correct: [42.9744973  69.55615498]%
Cross Entropy Loss: [1.728939   0.85666287]
Correct: [46.10102992 71.43207455]%
Cross Entropy Loss: [1.6569219  0.71602404]
Correct: [47.52329573 75.85826385]%
Cross Entropy Loss: [1.550477  0.6428372]
Correct: [50.02452182 78.65375184]%
Cross Entropy Loss: [1.4984617 0.6055265]
Correct: [51.29965669 79.47523296]%
Cross Entropy Loss: [1.3847317 0.5530833]
Correct: [54.34036292 81.7312408 ]%
Cross Entropy Loss: [1.3731031 0.5048047]
Correct: [54.46297205 82.90828838]%
Cross Entropy Loss: [1.3175362  0.47308514]
Correct: [57.08680726 84.83325159]%
Cross Entropy Loss: [1.2559967  0.44578034]
Correct: [58.00637567 85.2378617 ]%
Cross Entropy Loss: [1.2617377  0.43368855]
Correct: [58.0308975  85.81412457]%
Cross Entropy Loss: [1.184549  0.3856602]
Correct: [60.06620893 87.54291319]%
Cross Entropy Loss: [1.1691493  0.36697918]
Correct: [60.80186366 88.3521334 ]%
Cross Entropy Loss: [1.1113313  0.33461976]
Correct: [62.72682688 89.44335459]%
Cross Entropy Loss: [1.0981593 0.3232142]
Correct: [63.08239333 89.52918097]%
Cross Entropy Loss: [1.0731611 0.3194055]
Correct: [63.60961256 89.56596371]%
Cross Entropy Loss: [1.0358571  0.30523935]
Correct: [65.24031388 90.01961746]%
Cross Entropy Loss: [1.0507315  0.26741442]
Correct: [65.19127023 91.51544875]%
Cross Entropy Loss: [1.0014249  0.26470965]
Correct: [66.04953409 92.05492889]%
Cross Entropy Loss: [0.9526882  0.22872484]
Correct: [67.68023541 93.25649828]%
Cross Entropy Loss: [0.94942486 0.25545955]
Correct: [67.52084355 92.04266797]%
Cross Entropy Loss: [0.9085201  0.23894475]
Correct: [68.64884747 92.81510544]%
Cross Entropy Loss: [0.93354386 0.2266892 ]
Correct: [67.85188818 93.21971555]%
Cross Entropy Loss: [0.86580855 0.19915287]
Correct: [70.31633154 94.02893575]%
Cross Entropy Loss: [0.8685792  0.24224716]
Correct: [70.34085336 92.39823443]%
Cross Entropy Loss: [0.833374   0.17090915]
Correct: [71.16233448 94.85041687]%
Cross Entropy Loss: [0.81966025 0.17884849]
Correct: [71.99607651 94.81363413]%
Mean abs weights tensor(0.0281, grad_fn=<MeanBackward0>)
  0%|          | 0/36 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 46.64%
From layer 1:
Accuracy: 63.03%
  0%|          | 0/128 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 73.06%
From layer 1:
Accuracy: 97.09%
  0%|          | 0/30 [00:00<?, ?it/s]
Cross Entropy Loss: [4.0985374 3.2944996]
Correct: [16.07405591 25.698872  ]%
Cross Entropy Loss: [2.455575 1.700628]
Correct: [29.76949485 47.41294752]%
Cross Entropy Loss: [2.2019534 1.36118  ]
Correct: [35.60568906 56.74350172]%
Cross Entropy Loss: [2.0125885 1.1350266]
Correct: [39.33300638 63.37665522]%
Cross Entropy Loss: [1.796655  0.9361459]
Correct: [43.80823933 69.29867582]%
Cross Entropy Loss: [1.7527888 0.84369  ]
Correct: [44.94850417 71.65277097]%
Cross Entropy Loss: [1.6077286  0.74455225]
Correct: [48.14860226 75.49043649]%
Cross Entropy Loss: [1.5420077  0.69764966]
Correct: [49.85286905 76.93722413]%
Cross Entropy Loss: [1.4828748 0.5889198]
Correct: [51.63070132 80.08827857]%
Cross Entropy Loss: [1.402006  0.5506784]
Correct: [53.60470819 81.63315351]%
Cross Entropy Loss: [1.351154  0.5050778]
Correct: [55.17410495 83.19028936]%
Cross Entropy Loss: [1.3070261  0.45732045]
Correct: [56.11819519 84.99264345]%
Cross Entropy Loss: [1.2810955 0.4435083]
Correct: [57.24619912 85.49534085]%
Cross Entropy Loss: [1.2076586  0.41869372]
Correct: [59.31829328 86.2432565 ]%
Cross Entropy Loss: [1.1462864  0.38581115]
Correct: [61.35360471 87.28543404]%
Cross Entropy Loss: [1.1318189  0.35406032]
Correct: [61.53751839 88.58509073]%
Cross Entropy Loss: [1.1145303  0.36534613]
Correct: [62.38352133 88.03334968]%
Cross Entropy Loss: [1.099712  0.3723377]
Correct: [62.73908779 87.69004414]%
Cross Entropy Loss: [1.0948285  0.32933536]
Correct: [63.14369789 89.2594409 ]%
Cross Entropy Loss: [1.003488   0.28201535]
Correct: [65.25257479 90.92692496]%
Cross Entropy Loss: [1.0008978  0.26445487]
Correct: [65.60814125 91.42962236]%
Cross Entropy Loss: [0.9835364 0.2391625]
Correct: [66.34379598 92.80284453]%
Cross Entropy Loss: [0.99835587 0.2664332 ]
Correct: [66.71162334 91.38057872]%
Cross Entropy Loss: [0.9341456  0.24310821]
Correct: [68.35458558 92.63119176]%
Cross Entropy Loss: [0.88600963 0.22124572]
Correct: [68.80823933 93.28102011]%
Cross Entropy Loss: [0.8852154  0.21745065]
Correct: [69.76459049 93.23197646]%
Cross Entropy Loss: [0.86673075 0.21071863]
Correct: [70.36537518 93.74693477]%
Cross Entropy Loss: [0.86694473 0.18618192]
Correct: [70.47572339 94.5806768 ]%
Cross Entropy Loss: [0.83385706 0.18601525]
Correct: [71.22363904 94.28641491]%
Cross Entropy Loss: [0.81904113 0.18668696]
Correct: [71.62824914 94.5806768 ]%
Mean abs weights tensor(0.0282, grad_fn=<MeanBackward0>)
  0%|          | 0/36 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 44.57%
From layer 1:
Accuracy: 63.34%
  0%|          | 0/128 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 73.85%
From layer 1:
Accuracy: 97.81%
  0%|          | 0/30 [00:00<?, ?it/s]
Cross Entropy Loss: [4.7971826 3.2862742]
Correct: [13.8548308  25.31878372]%
Cross Entropy Loss: [2.621397  1.7210925]
Correct: [27.75870525 46.29720451]%
Cross Entropy Loss: [2.2450938 1.2936238]
Correct: [34.17116233 57.82246199]%
Cross Entropy Loss: [2.0452032 1.131343 ]
Correct: [38.13143698 62.69004414]%
Cross Entropy Loss: [1.8787491  0.95713943]
Correct: [41.68710152 68.34232467]%
Cross Entropy Loss: [1.7834831 0.8576563]
Correct: [44.39676312 71.93477195]%
Cross Entropy Loss: [1.6640488 0.7480555]
Correct: [46.81216282 75.17165277]%
Cross Entropy Loss: [1.5685259  0.68799865]
Correct: [48.98234429 77.02305051]%
Cross Entropy Loss: [1.4929684 0.5893494]
Correct: [51.02991663 79.85532124]%
Cross Entropy Loss: [1.4721227 0.5467555]
Correct: [52.42766062 82.07454635]%
Cross Entropy Loss: [1.4286007 0.5760207]
Correct: [53.99705738 81.17949975]%
Cross Entropy Loss: [1.3564253  0.47276524]
Correct: [55.98332516 84.60029426]%
Cross Entropy Loss: [1.2754017  0.45800918]
Correct: [57.25846003 85.21333987]%
Cross Entropy Loss: [1.2299513  0.42598176]
Correct: [58.70524767 86.23099559]%
Cross Entropy Loss: [1.1954603  0.34833822]
Correct: [59.57577244 88.96517901]%
Cross Entropy Loss: [1.1853702 0.3482327]
Correct: [60.22560078 88.74448259]%
Cross Entropy Loss: [1.1468145  0.33548844]
Correct: [61.02256008 89.29622364]%
Cross Entropy Loss: [1.1210716  0.31688228]
Correct: [62.33447769 89.51692006]%
Cross Entropy Loss: [1.1064711  0.28780884]
Correct: [62.62873958 91.03727317]%
Cross Entropy Loss: [1.0739075  0.27798635]
Correct: [64.21039725 91.30701324]%
Cross Entropy Loss: [0.99441224 0.26528   ]
Correct: [65.87788131 91.6625797 ]%
Cross Entropy Loss: [0.97331583 0.25762606]
Correct: [66.29475233 91.77292791]%
Cross Entropy Loss: [0.96135193 0.25288576]
Correct: [66.89553703 92.15301618]%
Cross Entropy Loss: [0.9203157  0.21776143]
Correct: [68.34232467 93.40362923]%
Cross Entropy Loss: [0.9123857  0.22536948]
Correct: [68.50171653 93.10936734]%
Cross Entropy Loss: [0.9121338  0.20319808]
Correct: [68.74693477 94.02893575]%
Cross Entropy Loss: [0.8847519  0.20627722]
Correct: [69.51937224 93.62432565]%
Cross Entropy Loss: [0.85521233 0.2008538 ]
Correct: [70.48798431 93.86954389]%
Cross Entropy Loss: [0.86435163 0.1951009 ]
Correct: [70.15693968 93.99215302]%
Cross Entropy Loss: [0.8571837  0.16028777]
Correct: [70.34085336 95.52476704]%
Mean abs weights tensor(0.0282, grad_fn=<MeanBackward0>)
  0%|          | 0/36 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 45.54%
From layer 1:
Accuracy: 64.09%
  0%|          | 0/128 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 74.02%
From layer 1:
Accuracy: 95.82%
  0%|          | 0/30 [00:00<?, ?it/s]
Cross Entropy Loss: [4.0969534 3.2174468]
Correct: [17.03040706 25.51495831]%
Cross Entropy Loss: [2.429454  1.7140265]
Correct: [30.88523786 47.00833742]%
Cross Entropy Loss: [2.1574295 1.4132793]
Correct: [36.41490927 55.16184404]%
Cross Entropy Loss: [1.9218984 1.1097312]
Correct: [41.57675331 63.09465424]%
Cross Entropy Loss: [1.7856371 0.9692116]
Correct: [44.55615498 68.28102011]%
Cross Entropy Loss: [1.7504174  0.88895535]
Correct: [46.12555174 71.45659637]%
Cross Entropy Loss: [1.5854278  0.74244195]
Correct: [49.32564983 75.1961746 ]%
Cross Entropy Loss: [1.4941695 0.6737008]
Correct: [52.0107896  77.57479156]%
Cross Entropy Loss: [1.4568505 0.650215 ]
Correct: [52.58705248 77.88131437]%
Cross Entropy Loss: [1.4022188  0.55166674]
Correct: [54.57332026 81.5228053 ]%
Cross Entropy Loss: [1.3765494 0.5480042]
Correct: [54.90436488 81.78028445]%
Cross Entropy Loss: [1.2722019  0.48113486]
Correct: [57.69985287 84.30603237]%
Cross Entropy Loss: [1.2381637 0.4440428]
Correct: [59.07307504 85.6424718 ]%
Cross Entropy Loss: [1.1999272  0.40076938]
Correct: [59.67385974 86.97891123]%
Cross Entropy Loss: [1.1747376 0.3969127]
Correct: [61.13290829 87.23639039]%
Cross Entropy Loss: [1.1263044 0.352383 ]
Correct: [62.37126042 88.83030897]%
Cross Entropy Loss: [1.0945936  0.34725538]
Correct: [62.80039235 88.78126533]%
Cross Entropy Loss: [1.0927949 0.3492716]
Correct: [63.10691515 88.83030897]%
Cross Entropy Loss: [1.0781292  0.30218056]
Correct: [63.84256989 90.71848946]%
Cross Entropy Loss: [1.0408287 0.2835253]
Correct: [64.94605199 91.11083865]%
Cross Entropy Loss: [0.97929776 0.29249007]
Correct: [66.65031878 90.64492398]%
Cross Entropy Loss: [0.957546   0.27482376]
Correct: [67.32466896 90.84109858]%
Cross Entropy Loss: [0.9396979  0.25831527]
Correct: [68.24423737 92.22658166]%
Cross Entropy Loss: [0.9471148  0.24204664]
Correct: [67.69249632 92.42275625]%
Cross Entropy Loss: [0.93977016 0.26991615]
Correct: [68.09710642 90.89014223]%
Cross Entropy Loss: [0.9255674  0.24245481]
Correct: [68.80823933 92.39823443]%
Cross Entropy Loss: [0.880603  0.2068333]
Correct: [69.99754782 93.63658656]%
Cross Entropy Loss: [0.8318071  0.19322075]
Correct: [71.33398725 94.1760667 ]%
Cross Entropy Loss: [0.8271154  0.18292095]
Correct: [72.13094654 94.69102501]%
Cross Entropy Loss: [0.8139197 0.179378 ]
Correct: [71.8857283  95.02206964]%
Mean abs weights tensor(0.0282, grad_fn=<MeanBackward0>)
  0%|          | 0/36 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 43.73%
From layer 1:
Accuracy: 62.10%
  0%|          | 0/128 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 72.33%
From layer 1:
Accuracy: 96.65%
Fast Classifier Mean Test Accuracy: tensor([45.0751, 62.0053]), Std: tensor([1.2252, 1.8781])
Fast Classifier Mean Train Accuracy: tensor([72.5552, 95.8301]), Std: tensor([1.5979, 1.4504])
  0%|          | 0/30 [00:00<?, ?it/s]
Cross Entropy Loss: [3.8480463 3.3947315]
Correct: [17.28788622 28.07748897]%
Cross Entropy Loss: [2.4616585 1.682569 ]
Correct: [30.68906327 50.08582639]%
Cross Entropy Loss: [2.0636897 1.2741773]
Correct: [37.54291319 60.2378617 ]%
Cross Entropy Loss: [1.8968732 1.0529089]
Correct: [41.18440412 67.38597352]%
Cross Entropy Loss: [1.7540731  0.88260776]
Correct: [44.28641491 71.76311918]%
Cross Entropy Loss: [1.6600736 0.8081438]
Correct: [47.00833742 74.0926925 ]%
Cross Entropy Loss: [1.5845402 0.7514948]
Correct: [49.47278077 76.29965669]%
Cross Entropy Loss: [1.4956416 0.6225226]
Correct: [51.32417852 79.52427661]%
Cross Entropy Loss: [1.4507385 0.6199757]
Correct: [52.97940167 80.02697401]%
Cross Entropy Loss: [1.384391 0.564674]
Correct: [54.36488475 81.46150074]%
Cross Entropy Loss: [1.3296341  0.45324314]
Correct: [55.50514958 85.45855812]%
Cross Entropy Loss: [1.30213    0.49778086]
Correct: [57.07454635 84.50220696]%
Cross Entropy Loss: [1.2610537  0.39183384]
Correct: [58.04315841 87.08925944]%
Cross Entropy Loss: [1.2320108  0.37599227]
Correct: [59.36733693 88.00882786]%
Cross Entropy Loss: [1.1600598  0.35792643]
Correct: [61.41490927 88.19274154]%
Cross Entropy Loss: [1.0935596  0.33121333]
Correct: [62.06473762 89.63952918]%
Cross Entropy Loss: [1.0809547 0.3185426]
Correct: [63.97743992 90.01961746]%
Cross Entropy Loss: [1.0727597  0.31754342]
Correct: [63.40117705 89.93379107]%
Cross Entropy Loss: [1.0573388 0.2775604]
Correct: [65.20353114 91.08631682]%
Cross Entropy Loss: [1.0161644  0.24532874]
Correct: [65.33840118 92.38597352]%
Cross Entropy Loss: [0.9696506  0.27651575]
Correct: [67.22658166 91.08631682]%
Cross Entropy Loss: [0.98432136 0.3462424 ]
Correct: [66.62579696 89.89700834]%
Cross Entropy Loss: [0.9409854 0.2473308]
Correct: [68.34232467 91.98136341]%
Cross Entropy Loss: [0.9183697  0.18199635]
Correct: [69.11476214 94.72780775]%
Cross Entropy Loss: [0.8984795 0.1811345]
Correct: [69.12702305 94.20058852]%
Cross Entropy Loss: [0.86999714 0.13408804]
Correct: [70.3776361  95.95389897]%
Cross Entropy Loss: [0.8988687  0.27052125]
Correct: [69.61745954 92.14075527]%
Cross Entropy Loss: [0.8641304 0.1990731]
Correct: [70.53702795 93.36684649]%
Cross Entropy Loss: [0.8223216  0.20762241]
Correct: [71.21137813 93.19519372]%
Cross Entropy Loss: [0.80306524 0.12395442]
Correct: [72.64590486 96.13781265]%
Mean abs weights tensor(0.0231, grad_fn=<MeanBackward0>)
  0%|          | 0/36 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 44.52%
From layer 1:
Accuracy: 63.52%
  0%|          | 0/128 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 72.14%
From layer 1:
Accuracy: 95.38%
  0%|          | 0/30 [00:00<?, ?it/s]
Cross Entropy Loss: [4.206088  3.5189412]
Correct: [17.68023541 28.80088279]%
Cross Entropy Loss: [2.4782095 1.8298502]
Correct: [29.85532124 48.11181952]%
Cross Entropy Loss: [2.1422782 1.4196917]
Correct: [36.46395292 58.19028936]%
Cross Entropy Loss: [1.960169  1.0874754]
Correct: [39.70083374 64.94605199]%
Cross Entropy Loss: [1.8213218  0.98169035]
Correct: [43.37910741 69.33545856]%
Cross Entropy Loss: [1.6789528  0.82417023]
Correct: [47.00833742 73.8965179 ]%
Cross Entropy Loss: [1.5771729 0.8443799]
Correct: [49.11721432 74.21530162]%
Cross Entropy Loss: [1.4971353 0.6665225]
Correct: [50.84600294 78.5188818 ]%
Cross Entropy Loss: [1.4465986 0.6179621]
Correct: [52.35409514 79.78175576]%
Cross Entropy Loss: [1.3983774 0.538283 ]
Correct: [54.15644924 82.05002452]%
Cross Entropy Loss: [1.3439674 0.5567146]
Correct: [55.11280039 81.84158901]%
Cross Entropy Loss: [1.3178648  0.44285196]
Correct: [56.48602256 85.40951447]%
Cross Entropy Loss: [1.241287   0.41059518]
Correct: [58.69298676 86.45169201]%
Cross Entropy Loss: [1.2061244  0.38343447]
Correct: [59.78420795 87.00343306]%
Cross Entropy Loss: [1.1600366  0.34978142]
Correct: [60.49534085 88.92839627]%
Cross Entropy Loss: [1.1510956  0.32274026]
Correct: [60.45855812 89.57822462]%
Cross Entropy Loss: [1.0788989  0.32024953]
Correct: [62.18734674 89.63952918]%
Cross Entropy Loss: [1.0813724  0.33582827]
Correct: [63.41343796 89.39431094]%
Cross Entropy Loss: [1.0472956 0.2853518]
Correct: [64.74987739 90.60814125]%
Cross Entropy Loss: [1.0207807  0.31041095]
Correct: [64.72535557 89.89700834]%
Cross Entropy Loss: [0.96917814 0.3011455 ]
Correct: [66.49092692 90.70622854]%
Cross Entropy Loss: [1.0051692  0.22701487]
Correct: [66.23344777 92.31240804]%
Cross Entropy Loss: [0.9415259 0.199434 ]
Correct: [67.54536538 93.56302109]%
Cross Entropy Loss: [0.927842   0.18669309]
Correct: [68.25649828 93.99215302]%
Cross Entropy Loss: [0.8912463  0.23012541]
Correct: [68.95537028 92.48406081]%
Cross Entropy Loss: [0.8806901  0.17567062]
Correct: [69.65424228 94.32319765]%
Cross Entropy Loss: [0.8703813  0.17074762]
Correct: [69.89946052 94.54389407]%
Cross Entropy Loss: [0.8536955 0.1719962]
Correct: [70.34085336 94.47032859]%
Cross Entropy Loss: [0.82452786 0.17609704]
Correct: [71.59146641 94.04119667]%
Cross Entropy Loss: [0.8003086  0.20986572]
Correct: [72.31486023 93.73467386]%
Mean abs weights tensor(0.0230, grad_fn=<MeanBackward0>)
  0%|          | 0/36 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 42.67%
From layer 1:
Accuracy: 64.97%
  0%|          | 0/128 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 67.48%
From layer 1:
Accuracy: 96.98%
  0%|          | 0/30 [00:00<?, ?it/s]
Cross Entropy Loss: [4.1560173 3.367989 ]
Correct: [17.94997548 29.14418833]%
Cross Entropy Loss: [2.590688  1.7177145]
Correct: [28.96027464 49.46051986]%
Cross Entropy Loss: [2.2253058 1.4117813]
Correct: [35.60568906 57.6262874 ]%
Cross Entropy Loss: [2.0219944 1.1347914]
Correct: [39.74987739 65.21579205]%
Cross Entropy Loss: [1.8264041 0.8972822]
Correct: [43.66110839 71.10102992]%
Cross Entropy Loss: [1.735253   0.83923924]
Correct: [45.54928887 72.94016675]%
Cross Entropy Loss: [1.6873991 0.8241558]
Correct: [46.86120647 74.27660618]%
Cross Entropy Loss: [1.5840389  0.63882774]
Correct: [49.36243256 79.02157921]%
Cross Entropy Loss: [1.5228869 0.6021028]
Correct: [51.38548308 80.66454144]%
Cross Entropy Loss: [1.43229    0.48725578]
Correct: [52.67287886 83.86463953]%
Cross Entropy Loss: [1.3550743  0.45841601]
Correct: [55.44384502 84.66159882]%
Cross Entropy Loss: [1.3344808  0.53857064]
Correct: [55.9220206  82.98185385]%
Cross Entropy Loss: [1.2682309  0.44500375]
Correct: [57.60176557 85.26238352]%
Cross Entropy Loss: [1.2394811  0.37625623]
Correct: [58.34968122 87.72682688]%
Cross Entropy Loss: [1.2252802  0.36405498]
Correct: [58.77881314 88.4502207 ]%
Cross Entropy Loss: [1.1619779  0.43156856]
Correct: [61.13290829 85.7405591 ]%
Cross Entropy Loss: [1.1320895  0.34387794]
Correct: [61.61108386 88.59735164]%
Cross Entropy Loss: [1.0682434  0.26920846]
Correct: [63.53604708 91.31927415]%
Cross Entropy Loss: [1.0796478  0.27721244]
Correct: [63.30308975 91.35605689]%
Cross Entropy Loss: [1.0644479  0.30462387]
Correct: [64.4556155  90.43648847]%
Cross Entropy Loss: [1.0150723  0.26614684]
Correct: [65.06866111 91.78518882]%
Cross Entropy Loss: [0.9981913  0.29921976]
Correct: [65.5590976  90.54683668]%
Cross Entropy Loss: [0.95010656 0.24726525]
Correct: [67.15301618 92.00588524]%
Cross Entropy Loss: [0.987556   0.21103159]
Correct: [66.44188328 93.39136832]%
Cross Entropy Loss: [0.92104584 0.2178937 ]
Correct: [68.04806278 92.71701815]%
Cross Entropy Loss: [0.8756506  0.25691646]
Correct: [70.15693968 91.76066699]%
Cross Entropy Loss: [0.87623113 0.16466418]
Correct: [69.91172143 94.92398234]%
Cross Entropy Loss: [0.87307906 0.16258323]
Correct: [69.75232957 94.89946052]%
Cross Entropy Loss: [0.8674003  0.16167463]
Correct: [70.48798431 94.66650319]%
Cross Entropy Loss: [0.81002593 0.14264874]
Correct: [71.787641   95.64737616]%
Mean abs weights tensor(0.0230, grad_fn=<MeanBackward0>)
  0%|          | 0/36 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 47.35%
From layer 1:
Accuracy: 65.68%
  0%|          | 0/128 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 73.55%
From layer 1:
Accuracy: 96.52%
  0%|          | 0/30 [00:00<?, ?it/s]
Cross Entropy Loss: [4.0714426 3.244105 ]
Correct: [17.81510544 29.46297205]%
Cross Entropy Loss: [2.455151  1.6656104]
Correct: [31.00784698 50.23295733]%
Cross Entropy Loss: [2.1909482 1.3464797]
Correct: [36.03482099 58.90142227]%
Cross Entropy Loss: [1.9907267 1.1786809]
Correct: [39.40657185 65.31387935]%
Cross Entropy Loss: [1.8365068 0.9466475]
Correct: [42.99901913 69.88719961]%
Cross Entropy Loss: [1.6985853 0.788624 ]
Correct: [46.43207455 75.29426189]%
Cross Entropy Loss: [1.5932205 0.705826 ]
Correct: [48.87199608 76.98626778]%
Cross Entropy Loss: [1.5121889  0.64586574]
Correct: [50.51495831 79.20549289]%
Cross Entropy Loss: [1.5145305  0.57844985]
Correct: [50.93182933 80.88523786]%
Cross Entropy Loss: [1.4117968  0.47179782]
Correct: [52.93035802 84.60029426]%
Cross Entropy Loss: [1.3390776  0.50047755]
Correct: [55.46836685 83.938205  ]%
Cross Entropy Loss: [1.2993658  0.46249327]
Correct: [56.79254537 84.95586072]%
Cross Entropy Loss: [1.3256488  0.44449434]
Correct: [56.41245709 85.56890633]%
Cross Entropy Loss: [1.2111647 0.4104222]
Correct: [59.42864149 86.35360471]%
Cross Entropy Loss: [1.1526072 0.3120318]
Correct: [60.80186366 90.0564002 ]%
Cross Entropy Loss: [1.1626122  0.31269935]
Correct: [60.75282001 89.43109367]%
Cross Entropy Loss: [1.124913  0.2976848]
Correct: [62.00343306 90.0564002 ]%
Cross Entropy Loss: [1.0740774  0.23546451]
Correct: [63.52378617 92.41049534]%
Cross Entropy Loss: [1.0297993  0.23527752]
Correct: [65.27709662 92.60666994]%
Cross Entropy Loss: [1.0638537 0.2707165]
Correct: [64.39431094 90.79205493]%
Cross Entropy Loss: [0.99400276 0.28485528]
Correct: [65.92692496 90.43648847]%
Cross Entropy Loss: [0.96417683 0.20715752]
Correct: [67.01814615 92.99901913]%
Cross Entropy Loss: [0.94441694 0.19730134]
Correct: [67.43501717 93.66110839]%
Cross Entropy Loss: [0.9213695  0.15779217]
Correct: [68.91858754 95.25502697]%
Cross Entropy Loss: [0.9132426 0.2743287]
Correct: [68.66110839 91.20892594]%
Cross Entropy Loss: [0.8812156  0.17188132]
Correct: [69.20058852 94.60519863]%
Cross Entropy Loss: [0.8708519  0.18731025]
Correct: [70.09563512 94.0779794 ]%
Cross Entropy Loss: [0.85586435 0.19063199]
Correct: [70.51250613 93.6733693 ]%
Cross Entropy Loss: [0.83280563 0.14246862]
Correct: [71.17459539 95.48798431]%
Cross Entropy Loss: [0.8053354  0.20717162]
Correct: [72.2903384  93.53849926]%
Mean abs weights tensor(0.0229, grad_fn=<MeanBackward0>)
  0%|          | 0/36 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 41.70%
From layer 1:
Accuracy: 61.48%
  0%|          | 0/128 [00:00<?, ?it/s]
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
Input In [9], in <cell line: 1>()
     14     print('Mean abs weights', out_projs[-1].out_proj.weight.abs().mean())
     15     test_accs.append(get_accuracy(SNN, out_projs, test_loader, device, cat=cat)[0])
---> 16     train_accs.append(get_accuracy(SNN, out_projs, train_loader, device, cat=cat)[0])
     17 test_accs = torch.stack([torch.tensor(ta) for ta in test_accs])
     18 train_accs = torch.stack([torch.tensor(ta) for ta in train_accs])

File ~/ownCloud/ETH/Master/Project_2/SNN_CLAPP/utils.py:155, in get_accuracy(SNN, out_projs, dataloader, device, cat)
    153     out, mem = out_proj(torch.cat(spk_step[:i+1], axis=-1), target)
    154 else:
--> 155     out, mem = out_proj(spk_step[i], target)
    156 if step == inp.shape[1]-1:
    157     logits[i] = mem

File ~/ownCloud/ETH/Master/Project_2/SNN_CLAPP/model.py:298, in simple_out.forward(self, inp, target)
    296 def forward(self, inp, target):
    297     cur = self.out_proj(inp)
--> 298     spk, self.mem = self.lif(cur, self.mem)
    299     if self.training:
    300         self._update_trace(inp)

File ~/miniconda3/lib/python3.9/site-packages/snntorch/_neurons/leaky.py:180, in Leaky.forward(self, input_, mem)
    173 # TO-DO: alternatively, we could do torch.exp(-1 /
    174 # self.beta.clamp_min(0)),
    175 # giving actual time constants instead of values in [0, 1] as
    176 # initial beta
    177 # beta = self.beta.clamp(0, 1)
    179 if not self.init_hidden:
--> 180     self.reset = self.mem_reset(mem)
    181     mem = self._build_state_function(input_, mem)
    183     if self.state_quant:

File ~/miniconda3/lib/python3.9/site-packages/snntorch/_neurons/neurons.py:106, in SpikingNeuron.mem_reset(self, mem)
    103 def mem_reset(self, mem):
    104     """Generates detached reset signal if mem > threshold.
    105     Returns reset."""
--> 106     mem_shift = mem - self.threshold
    107     reset = self.spike_grad(mem_shift).clone().detach()
    109     return reset

KeyboardInterrupt: 
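The cat flag controls what each readout sees: with cat=False the output projection at position i is trained on the spikes of layer i alone ('From Layer i+1' in the plots below), with cat=True on the concatenation of all layers up to i ('Until Layer i+1'), matching torch.cat(spk_step[:i+1], axis=-1) in utils.get_accuracy. A minimal illustration of the two feature constructions, with hypothetical per-layer spike batches:

In [ ]:
import torch

# hypothetical spike outputs of three layers for a batch of 8 samples, 4 units each
spk_step = [torch.randint(0, 2, (8, 4)).float() for _ in range(3)]
i = 1
features_single = spk_step[i]                      # cat=False: layer i only
features_cat = torch.cat(spk_step[:i+1], axis=-1)  # cat=True: layers 0..i concatenated
print(features_single.shape, features_cat.shape)   # torch.Size([8, 4]) torch.Size([8, 8])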
In [ ]:
# run all checkpoints
if not args.augment: 
    last_ckp = 20
    test_accs_ckpt = []
    train_accs_ckpt = []
    epochs = []
    while True:
        print(f'Checkpoint {last_ckp}')
        SNN_ckp = EchoSpike(args.n_inputs, args.n_hidden, beta=args.beta, device=device, recurrency_type=args.recurrency_type, online=args.online).to(device)
        try:
            print(model_name[:-3] + f'_epoch{last_ckp}.pt')
            state_dict = torch.load(folder+model_name[:-3] + f'_epoch{last_ckp}.pt', map_location=device)
            # state_dict = {key.replace('clapp', 'layers'):value for key, value in state_dict.items()}
            # torch.save(state_dict, folder+model_name + f'_epoch{last_ckp}.pt')
            SNN_ckp.load_state_dict(state_dict)
        except Exception:  # no (loadable) checkpoint for this epoch: try the next one
            if last_ckp > 1500:
                break
            else:
                last_ckp += 20
                continue
        epochs.append(last_ckp)
        last_ckp += 20
        snn_samples, targets = get_samples(SNN_ckp, train_loader, args.n_hidden, device)
        cat = True
        with torch.no_grad():
            out_projs, acc, losses_out = train_out_proj_fast(SNN_ckp, args, 60, 60, snn_samples, train_loader.y, cat=cat, lr=1e-4, weight_decay=1)
        test_accs_ckpt.append(torch.tensor(get_accuracy(SNN_ckp, out_projs, test_loader, device, cat=cat)[0]))
        train_accs_ckpt.append(torch.tensor(get_accuracy(SNN_ckp, out_projs, train_loader, device, cat=cat)[0])) 
    test_accs_ckpt = torch.stack(test_accs_ckpt)
    train_accs_ckpt = torch.stack(train_accs_ckpt)
    # save the results (already stacked into tensors above)
    torch.save(test_accs_ckpt, folder+model_name[:-3]+'_test_accs_ckpt.pt')
    torch.save(train_accs_ckpt, folder+model_name[:-3]+'_train_accs_ckpt.pt')
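Instead of probing file names one by one inside a try/except, the available checkpoints could also be collected up front. A minimal sketch, assuming the _epoch{N}.pt naming used above:

In [ ]:
import glob
import re

# list all saved checkpoint epochs for this model (naming assumed as above)
ckpt_paths = glob.glob(folder + model_name[:-3] + '_epoch*.pt')
ckpt_epochs = sorted(int(re.search(r'_epoch(\d+)\.pt$', p).group(1)) for p in ckpt_paths)
print(f'{len(ckpt_epochs)} checkpoints found:', ckpt_epochs)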
In [ ]:
# plot train and test accuracy over time
plt.figure()
for i in range(test_accs_ckpt.shape[-1]):
    if i == 0:
        plt.plot(epochs, 100*test_accs_ckpt[:,i], color=color_list[i], label='Directly from inputs')
        plt.plot(epochs, 100*train_accs_ckpt[:,i], color=color_list[i], linestyle='--')
    else:
        plt.plot(epochs, 100*test_accs_ckpt[:,i], color=color_list[i], label=f'Layer {i}')
        plt.plot(epochs, 100*train_accs_ckpt[:,i], color=color_list[i], linestyle='--')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
Out[ ]:
<matplotlib.legend.Legend at 0x7f4d60de9850>
In [ ]:
if not args.augment:
    # if snn_samples already exist, don't recompute
    try:
        snn_samples
    except:
        snn_samples, targets = get_samples(SNN, train_loader, args.n_hidden, device)
    for cat in [False, True]:
        out_projs_closed = train_out_proj_closed_form(args, snn_samples, targets, cat=cat)
        test_acc_closed, _ = get_accuracy(SNN, out_projs_closed, test_loader, device, cat=cat)
        train_acc_closed, _ = get_accuracy(SNN, out_projs_closed, train_loader, device, cat=cat)

        # grouped Bar plot the Accuracies of the different layers both during training and testing
        sns.set_theme(style="whitegrid")
        labels = ['From Inputs Directly', *[f'Until Layer {i+1}' for i in range(len(SNN.layers))]]
        if not cat:
            labels = ['From Inputs Directly', *[f'From Layer {i+1}' for i in range(len(SNN.layers))]]
        x = np.arange(len(labels))  # the label locations
        width = 0.35  # the width of the bars
        fig, ax = plt.subplots()
        rects1 = ax.bar(x - width/2, 100*torch.tensor(test_acc_closed), width, label='Test Accuracy', color=color_list[0])
        rects2 = ax.bar(x + width/2, 100*torch.tensor(train_acc_closed), width, label='Train Accuracy', color=color_list[1])
        # remove horizontal lines and spines
        ax.spines['right'].set_visible(False)
        ax.spines['left'].set_visible(False)
        ax.xaxis.grid(False)
        plt.xticks(np.arange(len(out_projs_closed)), labels, rotation=45)
        plt.legend()
        plt.ylabel('Accuracy [%]')
        plt.ylim([25, 100])
        if cat:
            accs['train_closed_cat'] = train_acc_closed
            accs['test_closed_cat'] = test_acc_closed
        else:
            accs['train_closed'] = train_acc_closed
            accs['test_closed'] = test_acc_closed
    with open('final_results/' + model_name[:-3] + '_accuracies.pkl', 'wb') as f:
        pickle.dump(accs, f)
(20, 700) 0.010724677 -0.010629931
(20, 450) 0.15072547 -0.08645948
(20, 450) 0.35159343 -0.3516128
(20, 450) 20.087902 -23.734768
  0%|          | 0/36 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 28.89%
From layer 1:
Accuracy: 61.48%
From layer 2:
Accuracy: 70.89%
From layer 3:
Accuracy: 70.19%
  0%|          | 0/128 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 66.96%
From layer 1:
Accuracy: 84.22%
From layer 2:
Accuracy: 87.21%
From layer 3:
Accuracy: 80.90%
(20, 700) 0.010724677 -0.010629931
(20, 1150) 0.15413637 -0.10144029
(20, 1600) 0.2811037 -0.36118773
(20, 2050) 0.64799124 -0.65297925
  0%|          | 0/36 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 28.89%
From layer 1:
Accuracy: 61.93%
From layer 2:
Accuracy: 73.59%
From layer 3:
Accuracy: 75.57%
  0%|          | 0/128 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 66.96%
From layer 1:
Accuracy: 94.26%
From layer 2:
Accuracy: 97.00%
From layer 3:
Accuracy: 97.89%
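train_out_proj_closed_form fits the output projections without iterative training. As a rough, hypothetical sketch of that idea (not the repository's implementation), a ridge-regression readout on summed spike counts can be computed in a single linear solve; the feature matrix X and labels y below are random stand-ins:

In [ ]:
import torch

def ridge_readout(X, y, n_classes, lam=1e-2):
    # closed-form ridge regression: W = (X^T X + lam*I)^{-1} X^T Y with one-hot targets Y
    Y = torch.nn.functional.one_hot(y.long(), n_classes).float()
    XtX = X.T @ X + lam * torch.eye(X.shape[1])
    return torch.linalg.solve(XtX, X.T @ Y)  # (n_features, n_classes)

# shapes as in this model, but random data (the real features come from get_samples above)
X = torch.rand(256, args.n_hidden[0])
y = torch.randint(0, args.n_outputs, (256,))
W = ridge_readout(X, y, args.n_outputs)
pred = (X @ W).argmax(dim=1)
# with more features than samples even random labels are fit almost perfectly,
# so this number only checks shapes, not generalization
print('fit accuracy on random data:', (pred == y).float().mean())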
In [ ]:
if args.augment:
    # plot some training characteristics
    print(f'Accuracy of last quarter: {100*acc[-len(acc)//4:].mean(axis=0)}%')
    plt.figure()
    for i in range(len(acc[0])):
        plt.plot(np.asarray(acc)[:,i]*100, color=color_list[i])
    plt.ylabel('Accuracy [%]')
    plt.xlabel('Training Step [x500]')
    labels = ['From Inputs directly', *[f'From Layer {i+1}' for i in range(len(SNN.layers))]]
    plt.legend(labels)
    plt.ylim([65, 95])
    plt.figure()
    print(losses_out.shape)
    for i in range(losses_out.shape[1]):
        plt.plot(np.arange(len(losses_out))/len(train_loader), savgol_filter(losses_out[:,i], 99, 1), label=labels[i], color=color_list[i])
    plt.ylabel('Cross Entropy Loss')
    plt.xlabel('Training Step')
    plt.ylim([0.15, 1.0])
    plt.legend();

Get output projection accuracy on the test set¶

In [ ]:
test_acc, pred_matrix = get_accuracy(SNN, out_projs, test_loader, device, cat=cat)
plt.figure()
plt.plot(100*np.asarray(test_acc))
plt.ylabel('Accuracy [%]')
plt.xlabel('Layer')

plt.figure()
plt.imshow(pred_matrix, origin='lower')
plt.title('Prediction Matrix for the final layer')
plt.xlabel('Prediction')
plt.ylabel('Target')
plt.xticks([i for i in range(args.n_outputs)])
plt.yticks([i for i in range(args.n_outputs)])
plt.colorbar();
  0%|          | 0/36 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 48.63%
From layer 1:
Accuracy: 72.00%
From layer 2:
Accuracy: 80.70%
From layer 3:
Accuracy: 82.86%
From layer 4:
Accuracy: 84.32%
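Since pred_matrix is a confusion matrix (targets on the rows, predictions on the columns, following the axis labels above), per-class accuracies can be read off its diagonal. A minimal sketch, assuming that orientation:

In [ ]:
import numpy as np

pm = np.asarray(pred_matrix, dtype=float)
# diagonal = correctly classified samples per class; row sums = samples per class
per_class_acc = np.diag(pm) / pm.sum(axis=1).clip(min=1)
for c, a in enumerate(per_class_acc):
    print(f'class {c:2d}: {100 * a:.1f}%')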
In [ ]:
from utils import get_accuracy
if args.augment:
    test_acc, _ = get_accuracy(SNN, out_projs, test_loader, device, cat=cat)
    train_acc, _ = get_accuracy(SNN, out_projs, train_loader, device, cat=cat) 
else:
    test_acc = torch.mean(test_accs, dim=0)
    print(test_acc)
    train_acc = torch.mean(train_accs, dim=0)
# grouped Bar plot the Accuracies of the different layers both during training and testing
sns.set_theme(style="whitegrid")
labels = ['From Inputs Directly', *[f'Until Layer {i+1}' for i in range(len(SNN.layers))]]
if not cat:
    labels = ['From Inputs Directly', *[f'From Layer {i+1}' for i in range(len(SNN.layers))]]
x = np.arange(len(labels))  # the label locations
width = 0.35  # the width of the bars
fig, ax = plt.subplots()
rects1 = ax.bar(x - width/2, 100*torch.tensor(test_acc), width, label='Test Accuracy', color=color_list[0])
rects2 = ax.bar(x + width/2, 100*torch.tensor(train_acc), width, label='Train Accuracy', color=color_list[1])
if not args.augment:
    ax.errorbar(x - width/2, 100*test_acc, yerr=100*torch.std(test_accs, dim=0), fmt='none', capsize=6, color=color_list[3])
    ax.errorbar(x + width/2, 100*train_acc, yerr=100*torch.std(train_accs, dim=0), fmt='none', capsize=6, color=color_list[3])
# remove horizontal lines and spines
ax.spines['right'].set_visible(False)
ax.spines['left'].set_visible(False)
ax.xaxis.grid(False)
plt.xticks(np.arange(len(out_projs)), labels, rotation=45)
plt.legend()
plt.ylabel('Accuracy [%]')
plt.ylim([25, 100])
#plt.title('SHD Accuracy');
  0%|          | 0/36 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 48.63%
From layer 1:
Accuracy: 72.00%
From layer 2:
Accuracy: 80.70%
From layer 3:
Accuracy: 82.86%
From layer 4:
Accuracy: 84.32%
  0%|          | 0/128 [00:00<?, ?it/s]
Directly from inputs:
Accuracy: 45.95%
From layer 1:
Accuracy: 78.62%
From layer 2:
Accuracy: 86.68%
From layer 3:
Accuracy: 88.97%
From layer 4:
Accuracy: 90.45%
Out[ ]:
(25.0, 100.0)

Few-Shot Learning (discontinued)¶

In [ ]:
# Randomly select k sample of each class and save the spiking activity
n_outputs = 20
n_repeats = 1
k = 20
fewshot_accuracies = torch.zeros((n_repeats, len(SNN.layers)))
for n in range(n_repeats):
    SNN.reset(0)
    one_shot_samples = torch.zeros(n_outputs, n_time_bins, args.n_inputs)
    one_shot_spks = torch.zeros(n_outputs, len(SNN.layers), args.n_hidden[0])
    for i in trange(n_outputs):
        for j in range(k):
            img, _ = train_loader.next_item(i, contrastive=False)
            one_shot_samples[i] = img.squeeze()
            for t in range(n_time_bins):
                logits, mem_his, clapp_loss = SNN(img[t].float(), 0) 
                one_shot_spks[i] += torch.stack(logits).squeeze()

    def metric(spk, one_shot):
        # similarity of each sample's accumulated spikes to every class prototype
        dists = torch.zeros(spk.shape[0], args.n_outputs)
        for i in range(args.n_outputs):
            # normalize the prototype so classes with higher overall firing do not dominate
            one_shot_i = one_shot[i] / one_shot[i].sum()
            dists[:, i] = torch.einsum('bi, i->b', spk, one_shot_i)
        return dists

    def get_predictions(spks):
        # nearest-prototype prediction, separately for each layer
        preds = torch.zeros(len(spks), spks[0].shape[0])
        for i in range(len(spks)):
            dists = metric(spks[i], one_shot_spks[:, i])
            preds[i] = dists.argmax(axis=-1)
        return preds
    dataset = test_loader
    batch = int(len(dataset)/100)
    correct_oneshot = torch.zeros(len(SNN.layers))
    SNN.eval()
    pred_matrix_oneshot = torch.zeros(n_outputs, n_outputs)
    for idx in trange(0, len(dataset), batch):
        SNN.reset(0)
        inp, target = dataset.x[idx:idx+batch], dataset.y[idx:idx+batch]
        logits = torch.zeros(len(SNN.layers), inp.shape[0], args.n_hidden[0])
        for step in range(inp.shape[1]):
            data_step = inp[:,step].float().to(device)
            spk_step, _, _ = SNN(data_step, 0)
            logits += torch.stack(spk_step)
        preds = get_predictions(logits)
        for i in range(preds.shape[0]):
            correct_oneshot[i] += int((preds[i] == target).sum())
        # for the last layer create the prediction matrix
        for j in range(preds.shape[1]):
            pred_matrix_oneshot[int(target[j]), int(preds[-1, j])] += 1
    correct_oneshot /= len(dataset)
    for i in range(len(SNN.layers)):
        print(f'From layer {i+1}:')
        print(f'Accuracy: {100*correct_oneshot[i]:.2f}%')
        fewshot_accuracies[n, i] = correct_oneshot[i]
    plt.imshow(pred_matrix_oneshot, origin='lower')
    plt.title('Prediction Matrix for the final layer')
    plt.xlabel('Prediction')
    plt.ylabel('Target')
    plt.xticks([i for i in range(n_outputs)])
    plt.yticks([i for i in range(n_outputs)])
    plt.colorbar();
    plt.figure()
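The cell above is a nearest-prototype classifier: each class prototype is the spike count accumulated over k examples and normalized to unit sum (so classes with higher overall firing do not dominate), and a test sample is assigned to the class whose prototype has the largest dot product with its accumulated spikes. A toy illustration of the same decision rule, with hypothetical 3-class, 5-unit prototypes:

In [ ]:
import torch

# hypothetical accumulated spike counts for 3 classes over 5 hidden units
prototypes = torch.tensor([[4., 1., 0., 0., 1.],
                           [0., 3., 3., 0., 0.],
                           [1., 0., 0., 8., 7.]])
prototypes = prototypes / prototypes.sum(dim=1, keepdim=True)  # normalize as in metric()
spk = torch.tensor([[0., 2., 2., 0., 1.]])                     # one test spike-count vector
scores = spk @ prototypes.T                                    # dot-product similarity per class
print('scores:', scores, '-> predicted class', scores.argmax(dim=1).item())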
In [ ]:
# Boxplot of the accuracies
plt.figure()
sns.set_style("whitegrid")
g = sns.boxplot(data=fewshot_accuracies*100)
# remove left spines
sns.despine(left=True)
plt.xticks(np.arange(len(SNN.layers)), [f'Layer {i+1}' for i in range(len(SNN.layers))])
plt.ylabel('Few-Shot Test Accuracy [%]')
plt.ylim([0, 100])
print(f'Average Accuracy: {100*fewshot_accuracies.mean(axis=0)}%')
print(f'Maximum Accuracy: {100*fewshot_accuracies.max(axis=0).values}%')